perm filename ARPA87.TEX[PRO,JMC] blob sn#873746 filedate 1989-06-30 generic text, type C, neo UTF8
COMMENT ⊗   VALID 00019 PAGES
C REC  PAGE   DESCRIPTION
C00001 00001
C00003 00002	%macros
C00006 00003	%  using plain tex
C00007 00004	\centerline{\Mtitle Basic Research}
C00009 00005	\section{Introduction}
C00011 00006	\section{Common Sense Knowledge and Reasoning} % jmc & val
C00047 00007	\section{Formalisms for Reasoning about Programs}  % arp86.clt[1,clt]
C00080 00008	\section{Experiments in automatic proof-checking}
C00084 00009	\section{Reasoning about processes occurring in time}
C00097 00010	\section{The Formal Reasoning Group}
C00100 00011	\subsection{Biography of John McCarthy}
C00121 00012	\subsection{Biography of Lester Earnest}
C00128 00013	\subsection{Biography of Vladimir Lifschitz}
C00138 00014	\subsection{Biography of Carolyn Talcott}
C00145 00015	\subsection{Biography of Natarajan Shankar}
C00150 00016	\subsection{Biography of Yoav Shoham}
C00170 00017	\section{References}
C00180 00018	% now we make our own control sequences for 0 and 1
C00181 00019	\newcount\psectCounter
C00188 ENDMK
C⊗;
%macros
% Overall layout: magnify everything one magstep and give paragraphs
% stretchable inter-paragraph glue.
\magnification=\magstep1
\parskip 8truept plus 1truept minus 1truept
% Cover-page title font: cmr7 scaled up four magsteps.
\font\Mtitle=cmr7 scaled \magstep4 % for titles

% Heading-number counters: \scount = section, \subcount = subsection,
% \subsubcount = subsubsection.
\newcount\scount
\newcount\subcount
\newcount\subsubcount

% \reset: restart subsection and subsubsection numbering; called from
% \section so the lower-level counters are per-section.
\def\reset{\subcount=0\subsubcount=0}
% \section{Title}: globally bump the section counter, typeset a bold
% numbered heading "N.  Title" flush left, then reset sub-counters.
\def\section#1{\bigbreak\global\advance\scount 1
\leftline{\bf \the\scount. \quad \bf #1}
\reset\smallskip}

% \subsection{Title}: globally bump the subsection counter and typeset a
% bold heading "N.M.  Title".  Also restart subsubsection numbering so
% that subsubsections are numbered per-subsection.  (Previously
% \subsubcount was reset only by \section, so a \subsubsection appearing
% after a second \subsection of the same section would continue the old
% count, e.g. print N.2.4 instead of N.2.1.)
\def\subsection#1{\bigbreak\global\advance\subcount 1
   \global\subsubcount=0
   \leftline{\quad\bf \the\scount.\the\subcount.\bf \quad#1}
   \smallskip}

% \subsubsection{Title}: globally bump the subsubsection counter and
% typeset a bold heading "N.M.K.  Title".
\def\subsubsection#1{\bigbreak\global\advance\subsubcount 1
   \leftline{\qquad\bf \the\scount.\the\subcount.\the\subsubcount.\bf \quad#1}
   \smallskip}


% Older numbered-heading macros driven by raw \count registers;
% apparently superseded by \section above.
% NOTE(review): \advcount is not a plain-TeX primitive -- presumably
% from an old SAIL macro package; confirm it is defined before \sect is
% used.  Also, `\count1.` below would be parsed as an assignment, not as
% printing the counter (\the\count1 was probably intended) -- \sect does
% not appear to be invoked in this file.
\def\sect#1.{\vskip 20pt plus20pt minus 7pt\penalty -100
   \secta#1.}
\def\secta#1.{\noindent{\bf \count1.\quad #1}\par\penalty 1000\advcount1\par
\vskip 5pt plus4pt minus 3pt\penalty 1000}

% Zero-width rule forcing a minimum line height and depth.
\def\mystrut{\vrule height 12.0truept depth4.0truept width0pt}
%\setcount1 1
%\setcount0 1
% \count0 is TeX's page number; \count1 numbers the \sect headings.
\count1=1
\count0=1

% \ssect Title. : unnumbered bold heading with break penalties.
\def\ssect#1.{\yyskip{\bf\penalty -50\hbox{\vbox{\hbox{#1}}}
\penalty 800}}
% Shorthand for \noindent.
\def \ni{\noindent}

% Vertical skips that encourage a page break before them (negative
% penalty), in the style of Knuth's manual macros.
\def\yyskip{\penalty-100\vskip8pt plus6pt minus4pt}
\def\yskip{\penalty-50\vskip 3pt plus 3pt minus 2pt}

%\def\textindent#1{\noindent\hbox to 19pt{\hskip 0pt plus 1000pt minus 1000pt#1\ 
%\!}}
%\def\hang{\hangindent 19pt}

%\def\numitem#1{\yskip\hang\textindent{#1}}

% \disleft <width>:<label>:<text>\par -- hanging paragraph whose label
% is set flush left in a box of the given width (used below for the
% personnel list and the project timetable).
\def\disleft#1:#2:#3\par{\par\hangindent#1\noindent
			 \hbox to #1{#2 \hfill \hskip .1em}\ignorespaces#3\par}
% \display: like \disleft but with the label set flush right in its box.
\def\display#1:#2:#3\par{\par\hangindent #1 \noindent
			\hbox to #1{\hfill #2 \hskip .1em}\ignorespaces#3 \par}
% \adx <indent>:<lines>\par -- one-column alignment indented by <indent>.
\def\adx#1:#2\par{\par\halign{\hskip #1##\hfill\cr #2}\par}

% now we make our own control sequences for 0 and 1
% Temporarily make @ a letter so plain TeX's private constants
% (\p@ = 1pt, \z@ = 0pt, \z@skip = 0pt skip) can be copied to public
% names for use after @ reverts to "other".
\catcode `@ = 11
\let\pt = \p@
\let\zpt = \z@
\let\zeroskip = \z@skip
\catcode `@ = 12

% Suppress the black overfull-box rule by setting its width to zero.
\def\noblackbox{\overfullrule \zpt}
%  using plain tex

% Page dimensions for the proposal ("true" units ignore \mag).
\mag=\magstep1
\hsize=6.0 true in
\vsize=8.75 true in 
%  \vsize=8.50 true in 
%  \voffset= .25 true in
\hoffset= .5 true in
% Let short pages run short instead of stretching glue to fill them.
\raggedbottom

%these are CLT macros for her part
% simple macro to make lines and spaces real
% \beginlsverbatim ... \endlsverbatim: a group in which line ends and
% runs of spaces are significant, for verbatim-like program text.
% The trailing %'s inside the definition suppress spurious spaces.
\def\beginlsverbatim{% 
\begingroup
\parindent0pt
\parskip 0pt plus 1pt
\def\par{\leavevmode\endgraf}%
\obeylines%
\obeyspaces%
}
\def\endlsverbatim{\endgroup}
% Under \obeyspaces, each space becomes a fixed 2pt skip: the active
% space character is globally \let to \spacer below.
\def\spacer{\hskip 2pt}
{\obeyspaces\global\let =\spacer}


% \pubitm{date}{authors}{text}\par -- publication-list item: first line
% full width, subsequent lines hung in by 20pt.
\def\pubitm#1#2#3\par{\par\hangafter 1\hangindent 20pt%
    \noindent#1\quad{\bf #2}: \ignorespaces #3}

% Turn off overfull-box rules for the whole document.
\noblackbox

\centerline{\Mtitle Basic Research}
\vskip .2truein
\centerline{\Mtitle in}
\vskip .2truein
\centerline{\Mtitle Artificial Intelligence}
\vskip .2truein
\centerline{\Mtitle and}
\vskip .2truein
\centerline{\Mtitle Formal Reasoning}

\vfill

\disleft 60truept:{Personnel:}:
John~McCarthy, PI, Prof. of Computer Science\hfil\break
Lester~Earnest, Executive Officer and Senior Research Associate\hfil\break
Vladimir~Lifschitz, Senior Research Associate\hfil\break
Carolyn~Talcott, Research Associate\hfil\break
Natarajan~Shankar, Research Associate\hfil\break
Yoav~Shoham, Assist. Prof. of Computer Science\hfil\break
%Joseph~Weening, Research Assistant\hfil\break
Robert~Givan, Research Assistant\hfil\break
Gian-Luigi~Bellin, Research Assistant\hfil\break

\vfill\eject
\section{Introduction}


	This is a proposal for the continued activity of the Formal
Reasoning Group of the Computer Science Department of Stanford University.
In the next three years the Formal Reasoning Group proposes to concentrate
on the problems of creating a general database of common sense knowledge
as well as continuing its basic research in artificial intelligence,
computational non-monotonic reasoning,
interactive theorem proving and proving that computer programs satisfy
their specifications.  The work will be based on our previous work in
formalization of common sense knowledge by John McCarthy, non-monotonic
reasoning including especially circumscription by McCarthy and Vladimir
Lifschitz, interactive theorem proving including EKL by Jussi Ketonen and
Natarajan Shankar's use of the Boyer-Moore theorem prover, and work in
mathematical theory of computation by McCarthy and more recently by
Carolyn Talcott.  The work in interactive theorem proving will be joined
by Natarajan Shankar.

The work of Yoav Shoham, a new Computer Science faculty member, in
formalizing properties of processes occurring in time is also included
in this proposal.
\vfill\eject
\section{Common Sense Knowledge and Reasoning} % jmc & val

\subsection{Background and Motivation}


	Since much of this work is basic research, its relevance to DoD
concerns needs to be described.  DoD, especially via the Strategic
Computing program, is increasingly preparing to use expert systems.  These
expert systems are intended to embody in computer programs the knowledge and
reasoning ability of an expert in a given domain and make this capability
available to military personnel doing their jobs.  The autonomous land
vehicle, the pilot's associate and the naval battle management projects
are specific embodiments of this technology.  Our work will help identify
and overcome important limitations of present expert system technology.
A 1983 view of these limits and how they might be overcome is in
(McCarthy 1983), ``Some Expert Systems Need Common Sense'',
 included in this proposal as Appendix A.

	Here is a concise version of our present opinion of these limitations.

	1. We can say that a program has {\it common sense} if it has
available to it sufficiently obvious consequences of what it learns about
a present situation and the general knowledge it has stored in its
database. For many purposes, the most important kind of common sense
knowledge deals with how the consequences of the various actions an agent
might take depend on the facts of the situation.  We include technical
knowledge with common sense knowledge provided, as is usually the case,
the reasoning to be done with it is common sense reasoning.

	For example, a battle management
program needs to know the effects of ordering a ship to move from one
location to another, e.g. to know that the ship will be at the location
to which it has been ordered.  However, there are preconditions for moving
the ship, e.g. it must have fuel and must be in working order.  The system
must be able to react correctly to the information that a ship has been
ordered to a new location, or that it is short of fuel.  Commonsense
reasoning of this kind is trivial for humans, but it doesn't seem to be
trivial for humans to design a program capable of simulating it.

	2. Many of the problems involved in the representation of
properties of actions require the use of {\it non-monotonic reasoning}.
(McCarthy 1986), ``Applications of Circumscription to Formalizing Common
Sense Knowledge'', explains about the importance of non-monotonic
reasoning in common sense.  It is included in the proposal as Appendix B.
See especially sections 1 and 2.

	Here's an example.  Once we think about it, a precondition
for moving a ship is that it be possible to raise or detach its anchor
and that it not be blocked by other ships or by a barrier.  However,
not  all necessary conditions can be listed specifically.  Instead, after
listing the specific conditions, it is necessary to add a clause that
the ship can be moved unless something else prevents it.  Recall that
the Swedes contemplated building a concrete barrier around the Soviet
submarine that had run aground.  It sure wasn't specifically in
anyone's mind or anyone's database that there not be a concrete barrier
as a condition for moving a submarine.  Human common sense handles
such matters by non-monotonic reasoning.

	Recently the non-monotonic formalisms concerned with
determining the effects of actions were tested by ``the Yale
shooting problem''.  Lifschitz and others have developed
ways of solving the difficulty thus presented. (See Lifschitz's paper
(1987), included in this proposal as Appendix C).
 We plan further development of these formalisms.

	3. An important goal in the development of programming languages
is achieving modularity. Parts of a modular program can be reused; modular
programs are easier to design, debug and modify. A strong form of
modularity is achieved by representing information {\it declaratively},
rather than embedding it in programs. Declarative information can be
transmitted to another agent even if it is not known who the recipient is
or how the information is to be used.  Prolog is an example of a
programming language whose programs themselves are collections of facts.
The various AI shells like Emycin, OPS-5, KEE and ART also represent as
much information as possible declaratively.

	However, none of these systems goes as far
as human use of declarative representation. While new facts
can be added to programs in any of the above systems, this has to
be done for most kinds of facts by a programmer who understands
the existing expert system.  For example, an expert system about
ships can perhaps be told about a new ship by a non-programmer who
is asked for its name, displacement and other qualities that have
been included in ship descriptions.  However, a present-day system ordinarily
cannot be told previously unknown obstacles to moving a ship or
 the effects of a new enemy weapon except by someone
familiar with the program.  The consequence may be that the users of
such a system may face an unpleasant choice between getting wrong
answers from the system and turning it off until it can be modified
and the modification debugged.

	As the declarative or logicist approach to AI advances,
the class of statements that can be made directly to an expert
system without reprogramming it or even understanding it in detail
increases.  Already formalized non-monotonic reasoning has increased
our ability to tell systems about new conditions that may interfere
with the plans of an agent and how they may be overcome.

	4. From a general point of view, non-monotonic reasoning
methods allow us to say which {\it models} of a collection of facts
are preferred, i.e. which of the possibilities allowed by the
facts are to be assumed.  Present non-monotonic formalisms make
these choices depend only on the objective circumstances.  It may
be that the choice should in some cases depend also on the system's
goals and state of knowledge.  Technically, this involves making
the {\it circumscriptions} depend on {\it intensional} as well
as {\it extensional} information.  We are exploring how to do this.

 	5. It has further become apparent that human information
is expressed in ways that depend heavily on {\it context} and that AI
systems will require similar capability.  This
has been obscured in AI systems by the fact that these systems
work in single, limited contexts.  For example, in one context
moving a ship from one place to another is a unitary act, and
the system represents directly facts about its preconditions and
consequences.  From another point of view, the same action is
the result of a complex strategy and any instance of moving the
ship is compounded from a substantial number and variety of
different actions.  The facts about moving ships need to be
represented in a variety of levels of detail.


\subsection{Proposed Work}


	We propose to continue the investigation of non-monotonic theories
and their applications, concentrating on the representation of knowledge about
the effects of actions. We will develop methods for the formal treatment of
contexts. All these problems and more come together in the
task of creating a database for common sense reasoning.  Therefore,
the creation of such a database is the major applied task we
are undertaking as part of this proposal.


\subsubsection{Reasoning about the Effects of Actions}


	Our current approach to representing knowledge about the effects
of actions is based on three ideas.

	One is the use of {\it situation calculus} (McCarthy and Hayes 1969),
which allows us to describe the changing world by means of explicit references
to ``situations'', or states of the world. For example, we express that the
ship $Ship10$ is in the port $Port20$ in the situation $Sit30$ by writing
$holds(at(Ship10,Port20),Sit30)$. The first argument of the predicate $holds$ is a
``propositional fluent'', i.e., a fact which may be true in some situations and
false in some others.

	The other idea is the use of {\it circumscription} (McCarthy 1980, 1986)
for telling the system when it is allowed to derive a conclusion ``by default''.
For example, we may wish the system to be able to conjecture about a given ship
that it is in working order if there is no information to the contrary. This
is expressed by the formula $¬ab1(ship,s)⊃holds(working(ship),s)$, where $ab1$
is an auxiliary ``abnormality'' predicate. This predicate is circumscribed,
i.e., assumed to have a minimal extension compatible with the available facts.

	Finally, we describe causal relations between actions and the values of
fluents using the approach of (Lifschitz 1987). The formula $causes(a,f,v)$
expresses that the action $a$, if successfully performed, causes the fluent
$f$ to take on the value $v$. For instance, the effect of the action
$move(ship,port1,port2)$ (moving a ship from one port to another) can be
described as follows:
%
$$causes(move(ship,port1,port2),at(ship,port2),true).$$
%
In addition, we specify {\it preconditions} for the successful execution of an
action using the notation $precond(f,a)$, which means: the propositional
fluent $f$ must be true if the action $a$ is to be successful.
For example, we can describe some of the preconditions for moving ships by
the formulas
%
$$precond(at(ship,port1),move(ship,port1,port2)),$$
$$precond(working(ship),move(ship,port1,port2)).$$
%
An action whose preconditions are satisfied can be successfully executed:
%
$$success(a,s)≡∀f(precond(f,a)⊃holds(f,s)).$$

	Circumscription is used to minimize the predicates $causes$ and $precond$.
By circumscribing $causes$, we express the assumption that actions cause no
changes in the world other than those specified in the database. For
instance, moving $Ship10$ does not affect the position of $Ship11$. In this
way circumscription resolves the difficulty known as the ``frame problem''.
By circumscribing $precond$, we assume that actions have no preconditions
other than those included in the database. If, for example, the system knows of
no preconditions for $move(ship,port1,port2)$ other than the two shown above then,
having verified that 
$$holds(at(Ship10,Port20),Sit30)$$
and
$$holds(working(Ship10),Sit30),$$
it will be able to derive the conclusion
%
$$success(move(Ship10,Port20,Port21),Sit30).$$
%
But we are free to add facts which extend the list of preconditions,
and then the system will automatically revise its conclusions
regarding the possibility of the successful performance of the action.
For instance, we can add a restriction on the distance between the ports:
%
$$precond(less(distance(port1,port2),limit(ship)),move(ship,port1,port2)).$$
%
Then the system will have to retract its conclusion about the possibility of
moving $Ship10$ from $Port20$ to $Port21$, and accordingly revise the plans
it has produced, unless the new precondition can be
verified on the basis of the available information about the locations of
the ports. Facts of this kind cannot readily be added to the database of
present expert systems without understanding and revising the existing
program.  Our goal is to make it possible to add such facts limiting
the applicability of previous rules without having to read or understand
them fully.  Humans can do it, and it's what formalized non-monotonic
reasoning promises for computer programs.


\subsubsection{Computational Non-Monotonic Reasoning}


	The definition of circumscription involves a second-order quantifier, and
working with this definition directly is difficult for both man and machine. As
a remedy, Lifschitz (1985) identified the ``separable'' case of circumscription,
when effective methods exist for representing circumscription by a first-order
formula (the paper is included in the proposal as Appendix D).
The instances of circumscription used in applications are often
separable or can be reduced to the separable case.

	To illustrate the progress made in computational non-monotonic reasoning,
consider two examples from McCarthy's paper on applications of circumscription
(McCarthy 1986). In the ``flying birds'' example of an inheritance hierarchy
with exceptions, the result of circumscription
is justified by a rather long dialog with an interactive proof checker; more
importantly, one had to ``guess'' the result before checking its correctness.
Later the separable case of circumscription was identified, and it became
clear that the database in that example is separable. The result can be
found then by a routine computation. In fact, an experimental program
written recently by Arkady Rabinov of our group does computations of this
kind automatically.  The other example is the axiom set for actions in the
blocks world, intended to illustrate the use of circumscription for
solving the qualification problem and the frame problem. Due to
computational difficulties, it was not recognized in the initial drafts of
the paper that the proposed formulation had a bug. It is clear now how to
formalize this sort of reasoning about actions in a separable way.

	Another promising approach to computing circumscription is based on its
relation to logic programming. Logic programs can be viewed as a special case of
circumscriptive theories (Lifschitz 1987a). It is known, on the other
hand, that some circumscriptive theories can be ``executed'' as PROLOG programs.
Przymusinski (1986) proposes an algorithm for query answering in circumscriptive
theories using ideas of logic programming and resolution.


\subsubsection{Formalization of Contexts}


	The proposed work on contexts is quite new; there are no published
papers.  Part of the reason the area hasn't been studied before is that it
depends on non-monotonic reasoning, and this has only recently been
developed.

	Consider ``The flounder crashed on the beach''.  The word
flounder usually denotes a fish, but according to certain conventions,
it might also be used as a nickname for a Soviet fighter plane.

	We propose to formalize the idea of context.  Propositions are
reified and are asserted by formulas of the form $holds(p,c)$,
where $p$ is the proposition and $c$ is a context.
There is a partial order $c1 ≤ c2$ among contexts interpreted as
asserting that $c1$ is less general, i.e. has more specific assumptions,
than $c2$.  Thus we will have $c1 ≤ c2 ∧ holds(p,c2) ⊃ holds(p,c1)$.
Different contexts are specific about different things, and there is
no most general context in which nothing is assumed; contexts can always
be generalized further. There is a
non-monotonic rule asserting that normally something holding in the
more special context also holds in a more general context.
We believe that formalized contexts will permit general databases
that contain implicit assumptions as does human knowledge.  The
reasoning program will be able to transcend any of these assumptions
when there is evidence that this should be done.


\subsubsection{The Common Sense Database}


	A system capable of reasoning about moving ships has to know a large
number of facts in its special area of expertise. But this knowledge is
useless unless it is supplemented by some facts of a more general nature.
For example, the system must know that, when an action has been
successfully executed, each fluent affected by the action gets the value
the action causes it to take on, and all other fluents retain their old values.
Symbolically:
%
$$success(a,s)∧causes(a,f,true)⊃holds(f,result(a,s)),$$
$$success(a,s)∧causes(a,f,false)⊃¬holds(f,result(a,s)),$$
$$success(a,s)∧∀v¬causes(a,f,v)⊃(holds(f,result(a,s))≡holds(f,s)).$$
%
The system must know some basic facts about the durations of events:
%
$$duration(a)>0,$$
$$time(result(a,s))=time(s)+duration(a).$$
%
It should also have some familiarity with the geometry of the physical space.
Such ``general-purpose'' commonsense facts would be shared by databases
designed for applications in many different areas.

	On the basis of our work on the formal treatment of non-monotonic
reasoning and contexts, we propose now to create a small database of general
commonsense facts and to design and partially implement a system able to perform
reasoning on the basis of these facts.
The core of the database will consist of a few dozen very general facts,
as in the examples above. It will be possible to give the system facts
about specific domains, such as ships or weapons, and instruct it to include
the facts in the database and to use them in reasoning.

	The use of such a powerful and purely declarative mechanism of
knowledge representation as classical logic extended by circumscription
will allow us to achieve a high degree of modularity. Reasoning programs
which have different goals and use different internal representations of
information and different heuristics will all be able to use the same database.
They will be able to share knowledge with each other by exchanging messages
in the language of the database without taking into account the internal
structure of other participants of the dialogue. Humans who want to understand
these messages will not have to know anything at all about the structure of
the programs which send them.

These features of the proposed commonsense database project will make it an
important step in the development of the idea of the {\it Common Business
Communication Language} (CBCL), described by McCarthy (1982) in the
following way:
``Any organization should  be able to communicate with  any other
without pre-arrangement  over ordinary dial-up telephone connections$\dots$
The  system  should  be open  ended  so that  as  programs
improve, programs that  can at first only order  by stock numbers can
later be programmed  to inquire about  specifications and prices  and
decide  on  the best  deal.    This  requires that  each  message  be
translatable into  a human-comprehensible form and that each computer
have a  way  of  referring  messages  it is  not  yet  programmed  to
understand to humans$\dots$
CBCL is strictly a communication protocol.  It should not
presuppose any data-base format for the storage within machines of
the information communicated, and it should not presuppose anything
about the programs that use the language.  Each business using the
language would have a program designed to use the particular part
of CBCL relevant to its business communications.  Thus CBCL presupposes
nothing about the programs that decide when to order or what orders
to accept.
CBCL is not concerned with the low-level  aspects of  the
message formats,  i.e., what kinds  of bit streams  and what  kinds of
modems, except to  remark that the system should avoid traps in these
areas,  and  the  users  should  be  able  to  change  their  systems
asynchronously.''

	The main objective of the project will be to show that the database can
serve as an epistemologically sound foundation for certain kinds of commonsense
reasoning. This will be done primarily by analyzing the properties of the database
as a circumscriptive theory and by experimenting with the reasoning program
which has access to the database.

	We propose the following timetable for the development of the database.

\disleft 100pt:
{I. Completed}:
Develop algorithms for computing the result of circumscription.

\disleft 100pt:
{II. Months 1-6}:
Implement algorithms for computing the result of circumscription and
conduct an experimental study of their efficiency. Define the general
structure of the database and the syntactic form of facts to be included
in it.

\disleft 100pt:
{III. Months 7-12}:
Integrate the circumscription program with a general-purpose proof checker.
Design a small prototype of the commonsense database capable of performing
commonsense reasoning in a more limited domain.

\disleft 100pt:
{IV. Months 13-18}:
Design the first version of the database.

\disleft 100pt:
{V. Months 19-24}:
Implement the first version of the database.

\disleft 100pt:
{VI. Months 25-36}:
Finalize the database. Investigate its capabilities and limitations.
Investigate the capabilities of heuristics for automated reasoning based
on circumscription. Determine directions for further research.

\section{Formalisms for Reasoning about Programs}  % arp86.clt[1,clt]


Carolyn Talcott is studying formal systems and structures for representing
and proving properties of programs,
computations, programming systems, and programming environments.

One objective is to account for and elucidate programming practices common
in the Artificial Intelligence community.  This includes studying the
kinds of programs and structures used in building systems for
representing knowledge, reasoning, and planning, and in building systems
for building systems.
An important aspect of the AI style of programming is the use of programs
and other complex structures in a variety of ways.  Some examples are:

\noindent
1. Using programs to compute (partial) functions.  This is the `obvious'
use of programs. The important point is that the data structures these
programs use often represent complex objects of the sort not usually
treated in work on program verification or specification.

\noindent
2. Using programs as functional arguments.
For example, a mapping function takes a function and a structure
as arguments and produces a new structure by applying the given 
function to each component of the input structure.
Search routines are often parameterized by functions for
generating successor nodes or by functions for evaluating positions.
Programs that take functional arguments are a generalization
of program schemes and properties of such programs can be used
as principles for proving properties of particular instances.

\noindent
3. Programs returning functional values.  Specifying some of the
arguments of a function and then `partially' applying that function
produces a second function as a value.  The function
$λ(\hbox{x},\hbox{y})\hbox{x}+\hbox{y}$, when
supplied with the argument 7 for the parameter y, yields the function
$λ(\hbox{x})\hbox{x}+7$, which adds 7 to its
argument.  Combinators applied to functions return functions as values.
For example $\hbox{compose}(\hbox{f},\hbox{g})(\hbox{x}) =
\hbox{f}(\hbox{g}(\hbox{x}))$ and
$\hbox{subs}(\hbox{f},\hbox{g})(\hbox{x}) =
(\hbox{f}(\hbox{x}))(\hbox{g}(\hbox{x}))$.

\noindent
4. Using programs to represent procedural information.
Information in data bases, knowledge, and problem solving strategies
may be represented procedurally (as a program).

\noindent
5. Using programs to represent infinite or partially completed objects
such as streams, sets, or continuations.  Suppose there are many solutions
to a given problem. One means of representing the solutions is by a program
which, when queried, produces a solution and another program representing
the remaining solutions.

\noindent
6. Using programs as functions that specify control structure.
Co-routines (co-operating processes) can be expressed using functions as
can many forms of delayed evaluation.  For example $λ()\hbox{f}(\hbox{x})$
represents f applied to x, but will only be evaluated if explicitly called
for (by application to an empty list of arguments).

\noindent
7. Using programs as behavioral descriptions of complex
data structures  -- object oriented programming. 
Instead of implementing a data structure as
a block of storage along with a set of programs for operating on that
storage, one can write a program with local store
which takes operation names
and additional parameters as arguments and carries out the named operation,
updating its local store if necessary,
and returning some information about the data structure or
 perhaps returning a 
program representing the updated data structure as a value.

\noindent
8. Using programs as objects that can be operated on.
We include here generating, interpreting, and compiling programs.
A further example is the transformation of simple
clear but inefficient programs into possibly complex and obscure but efficient
programs.

Other aspects of this style of programming include non-local control
mechanisms such as catch and throw, macros and metafunctions (for example,
to define new constructs or implement convenient abbreviations), `hooks'
into the underlying system (hooks provide dynamic access to the symbol
table and other data structures used by the interpreter and other system
routines), and the use (often interchangeably) of both compiled and
interpreted code.

\subsection{Goals and applications}

Our goal is to be able to represent the wide variety of programming styles
used in AI and other areas and to treat the diverse uses of programs in an
integrated system.
We want to develop informal and formal methods to express and prove
properties that reflect naturally the intent of system designers and
programmers.  The informal part is based on a model of computation
(computation structure) that naturally accounts for the various styles of
programming encountered in AI systems.  There are two ways to mechanize
this model.  One is to encode the structure as data.  The second is to
define a formal theory for proving assertions about the structure.  The
connection between the two provides additional means of expressing
properties of the system.

Although the initial work will be abstract and technical, there
are many practical applications.  Some of the applications are summarized
below.

\noindent
1. Mechanization of the system via formalization of the metatheory and
model theory will provide an implementation of the system and a basis for
mechanical proof checking.  It also will provide tools for building
systems---by providing data types corresponding to some of the complex
structure used in these systems.

\noindent
2. We will be able to prove properties of `real' Lisp programs, such as
programs that contain arbitrary assignments, non-local control such as
CATCH and THROW, as well as programs containing MACROs and META functions.
We will also treat programs that act on memory structures---such programs
in Lisp use operations that are destructive on list structure, such as
RPLACA and RPLACD.  We will be able to treat programs using powerful
control definition mechanisms such as environment descriptions of
Interlisp and stack groups of Zetalisp.

\noindent
3. We will formalize the notion of a computation system as computation
theory plus implementation.  Here we move from the static aspects of a
computation theory to the dynamic aspects of interactive systems.  An
interactive and dynamically changing system will be explained in terms of a
theory, a model, and an interpretation (dynamically) connecting the two.

\noindent
4.  Formalization of the dynamic aspects of a programming system will
provide a framework for talking about declarations and definitions,
debugging, handling ERROR conditions, and distinguishing between different
kinds of undefinedness---abnormal termination versus non-termination.  A
clean, simple description of a system makes provision for and use of debugging
facilities easier, more straightforward, and more effective.

\noindent
5. Tools for expressing and proving properties of high-level program descriptions
are useful for compiling and automatic program optimizations.




\subsection{Intensional theories of computation with higher order abstractions}

An intensional computation theory contains a computation domain 
(arguments and values for computations); 
programs (expressions describing computations); 
and an abstract machine with structures for representing computations and rules
for carrying out computations.
Our notion of computation theory draws on ideas of an equation calculus
[Kleene 1952] and [McCarthy 1963], 
the SECD machine of [Landin 1964-6],
abstract recursion theory [Moschovakis 1969],
and information structures [Wegner 1972].
The resulting programming
system is related to SCHEME [Steele and Sussman 1975].

Our computation theories are constructed uniformly from
data structures---a collection of data and operations taken to be
primitive.  The computation domain includes the given data and 
primitive operations, functional abstractions called {\it pfns},
and control abstractions called {\it continuations}.
Pfns are Partial FUNctions  containing 
information describing how the value of the function is to be computed.
Applicative, imperative, iterative, recursive, and many other styles of
programming are naturally expressible using pfns.
Continuations represent computation contexts built up in the process
of carrying out computations. 
As objects of the computation domain,
continuations provide a means of remembering and switching contexts,
and of suspending and resuming computations.
Thus continuations can be used to program escape and co-routine mechanisms.

Within these theories the semantics of a programming language can be expressed
in a number of ways, for example as the function computed by an
applicative program or by using structures representing the computation
defined by a program.  The connections between a program, the function it
computes, and the computation it defines can be made precise.  The
different views of programs (a) as functions---which may take functions as
arguments and return functions as values---and (b) as data can be
mechanized.  This is crucial for applications of the theory to other areas
of reasoning such as building models or plans.


\subsection{Computations and programs as data}

Having programs and computations as data allows us to treat 
operations on programs and computations.  
In addition to being able to define operations, we
can represent and prove properties of these operations such as
soundness -- preserving the function computed; program size; and
the effect of an operation on the properties of the computation 
described by a program.


The traditional kinds of operations on programs include transformations,
translations and program construction.  
A further kind of operation on programs is the construction of derived
programs; this is based on an idea of McCarthy.  Derived programs compute
properties of the original program.  Complexity measures can be expressed
via derived programs.  For example, depth of recursion, number of CONSes
executed, or number of subroutine calls are typical complexity measures
for Lisp.  Properties of these derived programs are similar to
properties treated in the traditional work in analysis of algorithms.
Many other properties of a program can be computed by derived programs.
Some examples are structures representing traces or complete computation
trees and other structures useful for debugging.

Reflection principles formalize the connections between the
views of programs as functions, as descriptions of computation, and as data
to be operated on.
The addition of reflection principles provides a mechanism
for extending the system, 
for conversing with the system about itself, and for
defining programming and debugging tools within the language.
For example Lisp primitives such as QUOTE and CATCH and THROW
can be defined using reflection. Programs for examining and
updating the current environment can be written using reflection.
The idea of a formal theory extended by adding reflection principles has
its origin in formal metamathematics
(Kleene[1952]) and arithmetization of formal theories of arithmetic
(see for example Smorynski[1977]).


\subsection{Notions of equivalence of programs and  computations}

When programs are used in different contexts and with the variety 
of interpretations discussed above, questions such as correctness of 
program transformations become complex.  In order to specify the intent 
and correctness of programs and operations on programs we must be able 
to express the precise sense in which programs are to be considered equivalent. 
The classical (extensional) notion is that programs are equivalent if 
they are defined on the same domain and have the same value on all arguments
in the domain of definition.  When arguments and values can be
function and control abstractions this is not an adequate definition.
Notions of equivalence are also important simply to express the fact that
different programs define the same computations or have the same
behavior.

\subsection{Accomplishments}

The major accomplishments in the area of MTC 
were the completion of two theses
(Talcott[1985] and Mason[1986]) developing mathematical semantics
for various aspects of Lisp style computation and 
using the semantics as a basis for proving properties of
a wide variety of programs.  
Preliminary versions of some of this work are reported in
Mason and Talcott[1985] and  Mason[1986a].  This work is summarized
in the thesis abstracts included below.

\subsubsection{Abstract of Talcott[1985]}
\centerline{The Essence of Rum}
\centerline{A Theory of the Intensional and Extensional Aspects}
\centerline{of Lisp-type Computation}
 
\smallskip
\centerline{Carolyn L. Talcott, Ph.D.}
\centerline{Stanford University, 1985}
\medskip


Rum is a theory of applicative, side-effect free computations over an
algebraic data structure.  It goes beyond a theory of functions computed
by programs, treating both intensional and extensional aspects of
computation.
%
Powerful programming tools such as streams, object-oriented programming,
escape mechanisms, and co-routines can be represented.
%
Intensional properties include the number of multiplications executed, the
number of context switches, and the maximum stack depth required in a
computation.
%
Extensional properties include notions of equality for streams and
co-routines and characterization of functionals implementing strategies
for searching tree-structured spaces.
%
Precise definitions of informal concepts such as stream and co-routine are
given and their mathematical theory is developed.
%
Operations on programs treated include program transformations which
introduce functional and control abstractions; a compiling morphism that
provides a representation of control abstractions as functional
abstractions; and operations that transform intensional properties to
extensional properties.
%
The goal is not only to account for programming practice in Lisp, but also
to improve practice by providing mathematical tools for developing
programs and building programming systems.

Rum views computation as a process of generating computation structures --
trees for context-independent computations and sequences for
context-dependent computations.  The recursion theorem gives a fixed-point
function that computes computationally minimal fixed points.  The context
insensitivity theorem says that context-dependent computations are
uniformly parameterized by the calling context and that computations in
which context dependence is localized can be treated like
context-independent computations.
%
Rum machine structure and morphism are introduced to define and prove
properties of compilers.
%
The hierarchy of comparison relations on programs ranges from intensional
equality to maximum approximation and equivalence relations that are
extensional.  The fixed-point function computes the least fixed point with
respect to the maximum approximation.  Comparison relations, combined with
the interpretation of programs using computation structures, provide
operations on programs both with meanings to preserve and meanings to
transform.

\subsubsection{Abstract of Mason[1986]}
\centerline{The Semantics of Destructive Lisp}
\smallskip
\centerline{Ian Alistair Mason}
\centerline{Stanford University, 1986}
\medskip

In this thesis we investigate various
equivalence relations between expressions
in first order LISP. This fragment of LISP
includes the destructive operations {\it rplaca}
and {\it rplacd}. 
To define the semantics we introduce
the notion of a memory structure. The equivalence 
relations are then defined within this model theoretic
framework.

A distinction is made
between
intensional relations and extensional relations.
The former class turns out to have a much more manageable
theory than the latter.
The principal intensional relation studied is 
{\it strong  isomorphism}; its properties
allow for elegant verification proofs
in a style similar to that of pure Lisp.
In particular the relation is preserved under many 
standard syntactic manipulations and transformations, and
it satisfies a Substitution theorem;
any program that is obtained from another by replacing a portion by another
strongly isomorphic one is guaranteed to be strongly isomorphic to the
original one.

A plethora of verification proofs of both simple and complex programs was  
given using the intensional equivalence relation. All of these proofs
were of the transformation plus induction variety. In contrast,
we gave some verification proofs of programs, using the extensional
relations. Because the Substitution Theorem fails for these extensional relations,
the proofs were necessarily of the hand simulation variety.

In a more theoretical light, we also proved that the  equivalence relations
introduced here are decidable, and used them to study the expressive powers
of certain fragments of Lisp.


\subsection{Objectives}

Our objectives in the area of MTC for the next three years are:

(1) Develop a theory of computation that treats function
and control abstractions and operations that alter memory.
This will be based on Talcott[1985] and Mason[1986] and
will include these theories as fragments.

(2) Extend (1) to treat asynchronous message passing and 
synchronization.  This will be a model for actor style
computation in addition to the programming styles previously
modeled.

(3) Study notions of program equivalence in the theories
developed in (1) and (2).  Extend the notions
of {\it strong equivalence} (Mason[1986]);
comparison relation (Talcott[1985]); and operational equivalence
(Plotkin[1975], Talcott[1986op]) to the models of (1) and (2).

(4) Extend the notions of derived properties and programs (Talcott[1985])
to treat memory and communication resources.

(5) Apply  the results of (1-3) to proving properties of programs.
Examples will include
(5.1) Smalltalk-like objects (function abstractions with internal state);
(5.2) streams and co-routines represented using internal state and updating
     -- this is an extension of Talcott[1985] where
      streams and co-routines were represented purely applicatively
      passing updated information as parameters;
and
(5.3) representing network protocols such as TCP/IP or SMTP as actor like
  objects and proving they meet the specifications.


(6) Extend the theories of (1-2) by addition of computational reflection 
principles  and study the mathematical properties of programs that use reflection.
Investigate such questions as 
(6.1) what are useful notions of program equivalence?
(6.2) when is a program that uses reflection equivalent to
     a program not using reflection?
(6.3) when is a program that uses reflection extensional?
     i.e. when does it depend only on ``black box'' behavior 
     of its arguments?

(7) Continue work on developing program transformations that
    handle function and control abstractions, assignment and
    operations that alter memory.  Extend this to develop transformations
    that handle message passing and reflection.
    For example - transform applicative programs that use functions to
    represent computation state into programs that use 
    data structures to represent computation state and updating
    to reuse resources and represent function application.
    Similar methods can be used to transform object oriented
    specifications of data into algebraic data structures
    (Steele and Sussman [1976]).
    A second example is developing transformations that
    convert message passing to function application.

(8) Formalize the results of (1-3,6) 
 using formal reasoning systems such as the Boyer-Moore
theorem prover (Boyer-Moore[1979]), FOL (Weyhrauch[1980]), or
EKL (Ketonen[1984]).  Use the formalization
for formalizing reasoning about programs and about operations on
programs.

\vfill\eject

\section{Experiments in automatic proof-checking}

Automatic  proof-checkers  are  useful  for  mathematics, program verification,
computer-aided instruction, and in artificial intelligence [McCarthy (1962), 
Shankar (1985), 
Boyer and Moore (1979), Gordon et al. (1979), Constable et al. (1986),
Ketonen (1984)].  While
present-day  proof-checkers  can be used to check fairly complex proofs, we are
still far away from a system which can check proofs at the level of  exposition
of a journal article.

Our  research  will be focussed on a careful analysis of the difference between
rigorous, informal proofs and their mechanizations  using  currently  available
proof-checking  systems.    It will be necessary to develop sensible metrics in
order to facilitate such a comparison.  Based on this analysis,  we  intend  to
design  and  implement  a  proof-checking  system which narrows the gap between
informal and mechanical proofs.

Initially, the proof-checker will consist of a small, believable core  that  is
sufficient to carry out certain metamathematical proofs [Shankar (1986)].  
The proof-checker
will then be extended by having it prove the soundness of extensions to itself.
Such   extensions   can  be  used  to  raise  the  level  of  exposition  of  a
machine-checked proof.  The proof-checker will be used to check several  proofs
in   a   variety  of  formal  theories  including  set  theory,  graph  theory,
combinatorics,  number  theory,  etc.,  and  in  the  areas  of   non-monotonic
reasoning,  program  and hardware verification, etc.  These proofs will be used
to develop a large database of useful, general mathematical facts and  provably
sound decision-procedures.

The  tangible  outcome of the project will be a proof-checking system which can
serve as a mathematician/programmer's workbench [Knuth (1984)].  
The experiments will also
lead  to  a  better  insight  into  the  process  by  which  humans  understand
mathematical explanation, and the extent to which  a  similar  process  can  be
programmed into a computer.

\vfill\eject

\section{Reasoning about processes occurring in time}

Yoav Shoham is joining the Computer Science Department as an Assistant Professor
and is also joining the Formal Reasoning Group. Here is what he proposes to do:

Research Goals
                                                                      
Stated broadly, my goal is to produce a system for reasoning about time that 
has two properties:

\item{1.}
The system is required to be both rigorous and natural: it needs
precise and yet intuitive semantics.

\item{2.}  
The rigor is not the solution
to any problem, only a prerequisite for giving one. 
The system must support specific sorts of inferences;
those should be
efficient, and hopefully also natural.

The above requirements are not to come at the expense of realistic models of 
the world.
The rules according to which the world operates are complex, and formulations which
require one to overidealize it 
are of no interest.
The proposed project is a continuation of my Ph.D. dissertation.
In the following I expand on these goals, and explain how I plan to achieve them.

The rigor of the system will derive from its logical basis. 
Without going into too
many details, there are quite a few ways in which to represent temporal
information in a logical system (see publication 8 for an overview of such
systems in AI, philosophy, and theoretical computer science, and publication
10 for a close scrutiny of temporal logics used in AI). 
Careful selection of a temporal formalism results also in a very intuitive system. 
The popularity of McCarthy and Hayes' situation calculus,
McDermott's temporal logic, and Allen's interval calculus
was due to their intuitive appeal at least as much as it was to their
technical merits. I intend to retain this characteristic of these systems.

This first goal has to a large extent already been met (see `timetable' below).
The second one has not, although considerable progress has been made towards it,
and that is where the research effort will be concentrated. 
One can identify in AI several classes of tasks which require reasoning about time,
and they include
prediction, explanation of observed phenomena, planning, and learning of
new rules of lawful change.
Although these classes of tasks are obviously related, in AI they have given rise to
by and large disjoint fields of research. Furthermore, most of the work in those areas
has been rather informal (to the detriment of its eventual effectiveness, it
is claimed here). I intend to first concentrate on the task of prediction,
and then expand to reasoning about planning (which includes the intermediate task
of explanation). 

Several problems arise already in the context of prediction. The best-known one
is the so-called {\it frame problem\/}. In my dissertation I argue that the 
frame problem actually embodies two distinct problems,
which I have called respectively the {\it qualification problem\/} and the
{\it extended prediction problem\/}. Briefly, they are the problems of
making statements of the form ``if this is true now then that is true later,''
without having the `this' part grow too large, and without having to make too
many such statements.
We are now in a position to give solutions to these problems. Besides my own solution 
(which appears in part in publication 11 and in full in my dissertation), there
have been related solutions proposed by Lifschitz, Kautz, Kowalski, and several
others. Most of these solutions rely on some form of nonmonotonic reasoning, 
or reasoning in frameworks which allow making ``defeasible'' inferences. This area,
pioneered by McCarthy, Reiter and 
McDermott, has seen many recent advances, due to McCarthy, Lifschitz, 
Etherington, Perlis, myself, and many others.  

As I have said, despite the recent progress, there is much to do before we can claim
to have laid the problem of temporal prediction to its final rest. The two main
issues that I see are extending current solutions to handle 
continuous and concurrent change, and applying our theories to more than toy test
cases. The following is a proposed timetable for tackling these issues.
 
\disleft 100pt:
{I. Completed}:Develop a temporal formalism, define its precise syntax and\
semantics, determine its complexity and expressiveness properties.
\disleft 100pt:
{II. Completed}:Develop a framework for nonmonotonic reasoning appropriate\
for temporal reasoning.
\disleft 100pt:
{III. Months 1-6}:Debug the theory of causal theories,
which is defined in terms of the nonmonotonic temporal logic,
and apply it to a substantial test case. In that connection we will
have a seminar on causal reasoning, and I expect a student to write
a program performing causal projection, either in the domain of medical
diagnosis or in the domain of rigid physical objects.
\disleft 100pt:
{IV. Months 1-12}:Extend the theory of causal theories to handle simultaneous
cause and effect, and especially continuous time. I have explored these
research directions at some length, but more work is needed here in 
order to devise efficient algorithms for these cases.
\disleft 100pt:
{V. Months 13-18}:The work so far will have concentrated on representing
and reasoning about forward projection in time. During this time period
we will start worrying about backwards projection, which in its simpler
form is the `explanation' task, and its more complex form is the `planning'
task. These tasks are inherently harder than the prediction task, because
of the special form of causal rules (which is reflected in the fact that 
for people  too planning is harder than predicting). This task will
amount to asking metalogical questions of the form ``what causal
theory would have this particular temporal fact in its c.m.i. model'' (c.m.i. 
models, or {\it chronologically most ignorant} models, are 
part of the underlying nonmonotonic temporal logic). Our work at this stage
will  stress the need for completely clear semantics, which most 
existing nontrivial planning systems lack.
\disleft 100pt:
{VI. Months 13-24}:Demonstrate the effectiveness of the planner in 
the domain of route planning and resource management. This domain, which
features continuous time, partial knowledge at the time of planning, and the
effect of actions in time, will also interface nicely with the next stage
of the project.
\disleft 100pt:
{VII. Months 18-36}:
It has to be admitted that so far logic has played a limited role in
``real world'' applications. At the same time, real applications tend to
suffer from 
unclarity, unmaintainability and inextensibility.
I would like to bridge this
gap between anaemic logical systems and theory-less working programs. This
is similar to Rosenschein's effort in applying formal theories of action to the
SRI robot. In our case too I plan to reason about a mobile robot, but with
the aim of escaping the bottleneck of the sensing problem. For this reason 
I am thinking about airborne robots, which on the one hand do not have the
immediate problem of running into obstacles (and in those periods when
they do I plan to take manual control over them), and on the other hand
are very suitable to reasoning about time, deadlines, resource management,
technical malfunctions, and so on.

By the time this stage is reached I hope to have a parallel research effort going on which will 
investigate the hardware constraints and which will involve avionics experts, but this
part of the project will be independent. It will concentrate on issues of knowledge
representation and reasoning: Which of the real-world features require representation?
What algorithms are needed? Are causal theories sufficiently expressive for this
task? And so on. By the end of this period I aim to have a prototype 
``airborne reasoner'' which will run in a simulated environment. Shortly afterwards
I hope to have a physical airborne device (construction of which will not 
be  part of this proposed research), at which time I will be able to
 experiment with the ``airborne reasoner'' in real situations. 

\vfill\eject




\section{The Formal Reasoning Group}

John McCarthy is Professor of Computer Science and has worked in the area
of formal reasoning applied to computer science and artificial
intelligence since 1957.  He is working on the logic of knowledge and
belief, the circumscription mode of reasoning, formalizing properties of
programs in first order logic, and a general theory of patterns.

Carolyn Talcott is a Research Associate in Computer Science,
 has a Ph.D. in chemistry from the University of California, and
 a Ph.D. in computer science from Stanford University.
She has done research in theoretical chemistry and theory of
finite groups.  She is currently interested in the formalization of
concepts and activities relating to computation and the eventual
application of the results to design and implementation of
interactive programming systems and to the representation and
mechanization of reasoning.

%Joseph Weening is a Ph.D. student and Research Assistant in Computer Science.
%He received a bachelor's degree from Princeton University in mathematics and
%is interested in the application of mechanized formal systems to the semantics
%of programming languages, with the goal of producing machine-assisted proofs
%of program properties.


\vfill\eject
\subsection{Biography of John McCarthy}

\disleft 80truept:{{\bf
Born:}}:  September 4, 1927 in Boston, Massachusetts

\disleft 80truept:{{\bf
Education:}}:
	B.S.  (Mathematics) California Institute of Technology, 1948\hfill\break
	Ph.D. (Mathematics) Princeton University, 1951

\noindent
{\bf Honors and Societies:}

\disleft 20truept:{}:
	American Academy of Arts and Sciences
\hfil\break
	American Society for the Advancement of Science
\hfil\break
	American Mathematical Society
\hfil\break
	Association for Computing Machinery
\hfil\break
	IEEE
\hfil\break
	Sigma Xi
\hfil\break
	Sloan Fellow in Physical Science, 1957-59
\hfil\break
	ACM National Lecturer, 1961
\hfil\break
	A. M. Turing Award from Association for Computing Machinery, 1971
\hfil\break
	Editorial Board, Artificial Intelligence Journal, 1975 - present
\hfil\break
	Academic Advisor, National Legal Center for Public Information, 1976 - 1980
\hfil\break
	Board of Directors, Information International, Inc., 1962 - present.
\hfil\break
	Board of Directors, Inference Corporation, 1983 - present.
\hfil\break
	Sigma Xi National Lecturer, 1977
\hfil\break
	Fellow, Center for Advanced Study in the Behavioral Sciences, 1979 - 1980.
\hfil\break
	President, American Association for Artificial Intelligence, 1983-84

\noindent
{\bf Professional Experience:}

\disleft 20truept:{}:
Procter Fellow, Princeton University, 1950-51
\hfil\break
Higgins Research Instructor in Mathematics,
\hfil\break
Princeton University, 1951-53

\disleft 20truept:{}:
Acting Assistant Professor of Mathematics,
\hfil\break
Stanford University, Sept. 1953 - Jan. 1955

\disleft 20truept:{}:
Assistant Professor of Mathematics, Dartmouth
\hfil\break
College, Feb. 1955 - June 1958

\disleft 20truept:{}:
Assistant Professor of Communication Science,
\hfil\break
M.I.T., 1958 - 1961

\disleft 20truept:{}:
Associate Professor of Communication Science,
\hfil\break
M.I.T., 1961 - 1962

\disleft 20truept:{}:
Professor of Computer Science
\hfil\break
Stanford University, 1962 - present

\disleft 20truept:{}:
Director, Artificial Intelligence Laboratory
\hfil\break
Stanford University, 1965 - 1980

\noindent{\bf 
\centerline{Professional Responsibilities and Scientific Interests:}}

\disleft 20truept:{}:
With Marvin Minsky organized and directed the Artificial
Intelligence Project at M.I.T.

\disleft 20truept:{}:
Organized and directed Stanford Artificial Intelligence Laboratory

\disleft 20truept:{}:
	Developed the LISP programming system for computing with
		symbolic expressions, participated in the development
		of the ALGOL 58 and the ALGOL 60 languages.  Present
		scientific work is in the fields of Artificial
		Intelligence, Computation with Symbolic Expressions,
		Mathematical Theory of Computation, Time-Sharing computer
		systems.

\noindent
{\bf Publications}
\medskip
\item{1.} 
{\bf McCarthy, John (1951)}: ``Projection Operators and Partial Differential
Equations'' Ph.D.  Thesis, Princeton University.

\item{2.} 
{\bf McCarthy, John (1952)}: ``A Method for the Calculation of Limit Cycles by
Successive Approximation'' in {\it Contributions to the Theory of Nonlinear
Oscillations II}, Annals of Mathematics Study No. 29, Princeton University,
pp. 75--79.

\item{2.} 
{\bf McCarthy, John (1953)}: ``An Everywhere Continuous Nowhere Differentiable
Function,'' {\it American Mathematical Monthly}, December 1953, p. 709.

\item{3.} 
{\bf McCarthy, John (1954)}:  ``A Nuclear Reactor for Rockets'' {\it Jet Propulsion},
January 1954.

\item{4.} 
{\bf McCarthy, John (1955)}:  ``The Stability of Invariant Manifolds'' Applied
Mathematics Laboratory Technical Report No. 36, Stanford University, 25
pp.

\item{5.} 
{\bf McCarthy, John (1956)}:  ``The Inversion of Functions Defined by Turing
Machines,'' in {\it Automata Studies, Annals of Mathematical Study No. 34,}
Princeton, pp. 177--181.

\item{6.} 
{\bf McCarthy, John (1956)}:  ``Aggregation in the Open Leontief Model,''
in Progress Report of Dartmouth Mathematics Project.

\item{7.} 
{\bf McCarthy, John (1956)}:  ``Measures of the Value of Information,''
{\it Proceedings of the National Academy of Sciences}, September 1956.

\item{8.} 
{\bf McCarthy, John (1956)}:  Co-editor with Dr. Claude E. Shannon of {\it Automata
Studies}, Annals of Mathematics Study No. 34, Princeton University Press.

\item{9.} 
{\bf McCarthy, John (1960)}: ``Recursive Functions of Symbolic Expressions and their
Computation by Machine,'' {\it Comm. ACM}, April 1960.

\item{10.} 
{\bf McCarthy, John (1960)}: ``Programs with Common Sense,'' in Proceedings of the
Teddington Conference on the Mechanization of Thought Processes, Her Majesty's
Stationery Office, London.
%common[e80,jmc]

\item{11.} 
{\bf McCarthy, John (with 12 others) (1960)} ``ALGOL 60'', {\it Numerische
Mathematik}, March 1960, also in {\it Comm. ACM}, May 1960 and Jan. 1963.

\item{12.} 
{\bf McCarthy, John (1961)}: ``A Basis for Mathematical Theory of Computation'',
in {\it Proc.  Western Joint Computer Conf.}, May 1961, pp. 225--238.
Later version in Braffort, P. and D. Hirschberg (eds.) {\it Computer
Programming and Formal Systems}, North-Holland Publishing Co. (1963).

\item{13.} 
{\bf McCarthy, John (1962)}: ``Time-Sharing Computing Systems,'' in {\it Management
and the Computer of the Future}, Martin Greenberger (ed.), MIT Press.

\item{14.} 
{\bf McCarthy, John (with Paul Abrahams, Daniel Edwards, Timothy
Hart and Michael Levin) (1962)}: 
{\it LISP 1.5 Programmer's Manual}, M.I.T. Press, Cambridge, Mass.

\item{15.} 
{\bf McCarthy, John (1962)}: ``Computer Programs for Checking Mathematical Proofs'',
{\it Amer. Math. Soc. Proc. of Symposia in Pure Math.}, Vol. 5.

\item{16.} 
{\bf McCarthy, John (1963)}: ``Towards a Mathematical Theory of Computation'',
in Proc.  IFIP Congress 62, North-Holland, Amsterdam.

\item{17.} 
{\bf McCarthy, John (1963)}: ``A Basis for a Mathematical Theory of Computation'', 
in P. Braffort and D. Hirschberg (eds.), {\it Computer Programming and
Formal Systems}, North-Holland Publishing Co., Amsterdam, pp. 33--70.

\item{18.} 
{\bf McCarthy, John (1963)}: ``A Time-Sharing Debugging System for a Small
Computer'', (with Boilen, Fredkin and Licklider), Proc. AFIPS 1963 Spring
Joint Computer Conf., Spartan Books, Detroit, pp. 51--57.

\item{19.} 
{\bf McCarthy, John (1963)}: ``The Linking Segment Subprogram Language and
Linking Loader Programming Languages'' (with F. Corbato and M. Daggett),
Comm. ACM, July 1963.

\item{20.} 
{\bf McCarthy, John (1965)}: ``Problems in the Theory of Computation'', in Proc.
IFIP Congress 65, Spartan, Washington, D.C..

\item{21.} 
{\bf McCarthy, John  (1966)}: ``A Formal Description of a Subset of Algol'',
{\it Formal Language Description Languages for Computer Programming},
T.B. Steel, Jr. (ed.), North-Holland Publ. Co., Amsterdam, pp. 1--12.

\item{22.} 
{\bf McCarthy, John (1968)}:  ``Time-Sharing Computer Systems'', in
{\it Conversational Computers}, William Orr (ed), Wiley Publishing Company.

\item{23.} 
{\bf McCarthy, John (1966)}:  ``Information'', {\it Scientific American}, Vol. 215.

\item{24.} 
{\bf McCarthy, John (1967)}:  ``THOR --- A Display Based Time-Sharing System'', 
(with D. Brian,	G. Feldman, and John Allen)
{\it AFIPS Conf. Proc.}, Vol. 30, (FJCC) Thompson, Washington, D.C..

\item{25.} 
{\bf McCarthy, John (1967)}:  ``Computer Control of a Hand and Eye'', in {\it Proc.
Third All-Union Conference on Automatic Control (Technical Cybernetics)},
Nauka, Moscow, (Russian).

\item{26.} 
{\bf McCarthy, John (1968)}:  ``Programs with Common Sense,'' in M. Minsky (ed.), 
{\it Semantic Information Processing}, M.I.T. Press, Cambridge, Mass.

\item{27.} 
{\bf McCarthy, John (1968)}:  ``A Computer with Hands, Eyes, and Ears,'' 
(with L. Earnest, D. Reddy, P. Vicens) 
{\it Proc. AFIPS Conf.} (FJCC).

\item{28.} 
{\bf McCarthy, John and P.J. Hayes (1969)}:  ``Some Philosophical Problems from
the Standpoint of Artificial Intelligence'', in D. Michie (ed), {\it Machine
Intelligence 4}, American Elsevier, New York, NY.
%phil[ess,jmc] with slight modifications

\item{29.} 
{\bf McCarthy, John (1972)}:  ``The Home Information Terminal,'' Man and Computer, 
in Proceedings International Conference, Bordeaux 1970, S. Karger, N.Y.

\item{30.} 
{\bf McCarthy, John (1973)}:  ``Mechanical Servants for Mankind,'' in {\it Britannica 
Yearbook of Science and the Future}.

\item{31.} 
{\bf McCarthy, John (1974)}:
Book Review: ``Artificial Intelligence: A General Survey'' by Sir James
Lighthill, in {\it Artificial Intelligence}, Vol. 5, No. 3.

\item{32.} 
{\bf McCarthy, John (1974)}:  ``Modeling Our Minds'' in {\it Science Year 1975, The
World Book Science Annual}, Field Enterprises Educational Corporation,
Chicago, ILL.

\item{33.} 
{\bf McCarthy, John (1976)}:  ``The Home Information Terminal,'' invited presentation, 
AAAS Annual Meeting, Feb. 18--24, 1976, Boston.

\item{34.} 
{\bf McCarthy, John (1976)}:  ``An Unreasonable Book,'' a review of {\it Computer
Power and Human Reason}, by Joseph Weizenbaum (W.H. Freeman and Co., San
Francisco, 1976) in {\it SIGART Newsletter 58}, June 1976, also in {\it Creative
Computing}, Chestnut Hill, Massachusetts, 1976 and in ``Three Reviews of Joseph
Weizenbaum's {\it Computer Power and Human Reason}'', (with Bruce
Buchanan and Joshua
Lederberg), Stanford Artificial Intelligence Laboratory Memo 291, Computer
Science Department, Stanford, CA.

\item{35.} 
{\bf McCarthy, John (1977)}:
Review: {\it Computer Power and Human Reason}, by Joseph Weizenbaum (W.H.
Freeman and Co., San Francisco, 1976) in {\it Physics Today}.

\item{36.} 
{\bf McCarthy, John (1977)}:
``The Home Information Terminal'' {\it Grolier Encyclopedia}.

\item{37.} 
{\bf McCarthy, John (1977)}:
``On The Model Theory of Knowledge'' (with M. Sato, S. Igarashi, and
T. Hayashi), {\it Proceedings of the Fifth International Joint Conference
on Artificial Intelligence}, M.I.T., Cambridge, Mass.

\item{38.} 
{\bf McCarthy, John (1977)}:
``Another SAMEFRINGE'', in {\it SIGART Newsletter} No. 61, February 1977.
%samefr[f76,jmc]

\item{39.} 
{\bf McCarthy, John (1977)}:
``History of LISP'', in Proceedings of the ACM Conference on the
History of Programming Languages, Los Angeles.
%lisp[f77,jmc]

\item{40.} 
{\bf McCarthy, John (1977)}:
``Epistemological Problems of Artificial Intelligence'', {\it Proceedings
of the Fifth International Joint Conference on Artificial 
Intelligence}, M.I.T., Cambridge, Mass.
%ijcai.c[e77,jmc]

\item{41.} 
{\bf McCarthy, John (1979)}:
``Ascribing Mental Qualities to Machines'' in {\it Philosophical Perspectives 
in Artificial Intelligence}, Ringle, Martin (ed.), Harvester Press, July 1979.
%.<<aim 326, MENTAL[F76,JMC]>>

\item{42.} 
{\bf McCarthy, John (1979)}: 
``First Order Theories of Individual Concepts and Propositions'', 
in Michie, Donald (ed.) {\it Machine Intelligence 9}, (University of
Edinburgh Press, Edinburgh).
%.<<aim 325,concep[e76,jmc]>>

\item{43.} 
{\bf Cartwright, Robert and John McCarthy (1979)}:
``Recursive Programs as Functions in a First Order Theory'',
in {\it Proceedings of the International Conference on Mathematical Studies of
Information Processing}, Kyoto, Japan.
%.<<aim 324, FIRST.NEW[W77,JMC]>>

\item{44.} 
{\bf McCarthy, John (1980)}: 
``Circumscription --- A Form of Non-Monotonic Reasoning'', {\it Artificial
Intelligence}, Volume 13, Numbers 1,2, April.
%.<<aim 334, circum.new[s79,jmc]>>

\item{45.} 
{\bf McCarthy, John and Carolyn Talcott (1980)}: {\it LISP --- Programming and
Proving}, course notes, Stanford University. (to be published as a book).

%.<<The references in this bibliography (BIOJMC[1,JMC]) should be in a
%.uniform style, because I often copy them to papers.  The last few are
%.correct.  The publication in italics and first names spelled out.
%.>>

\item{46.} 
{\bf McCarthy, John (1982)}: ``Common Business Communication Language'', in
{\it Textverarbeitung und B\"urosysteme}, Albert Endres and J\"urgen Reetz, eds.
R. Oldenbourg Verlag, Munich and Vienna 1982.
%cbcl[f75,jmc]

\item{47.} 
{\bf McCarthy, John (1982)}: {\it Coloring Maps and the Kowalski Doctrine},
Report No. STAN-CS-82-903, Computer Science Department, Stanford University,
Stanford, CA 94305.
%maps[e81,jmc]

\item{48.} 
{\bf McCarthy, John (1983)}: ``AI Needs more Emphasis on Basic Research'', {\it AI
Magazine}, Volume 4, Number 4, Winter 1983.
%presid.1[f83,jmc]

\item{49.} 
{\bf McCarthy, John (1983)}: ``Some Expert Systems Need Common Sense'', paper
presented at New York Academy of Sciences Symposium.
%common[e83,jmc]

\item{50.} 
{\bf McCarthy, John (1983)}: ``The Little Thoughts of Thinking Machines'',
{\it Psychology Today}, Volume 17, Number 12, December 1983.
%psycho.4[e83,jmc]

\item{51.} 
{\bf McCarthy, John (1984)}:
``Applications of Circumscription to Formalizing Common Sense Knowledge''.
This has been accepted by the 1984 AAAI
conference on non-monotonic reasoning, which will not have a proceedings
and is being submitted for publication to {\it Artificial Intelligence}.
\vfill\eject
\subsection{Biography of Lester Earnest}
\newcount\psectCounter
\def\psect#1{ 
   \global\advance\psectCounter1
   \par
   \penalty-200\bigskip
   \leftline{\bf \number\psectCounter.\enskip  #1}\nobreak}

\def\itmindent{20pt}
\def\itmtic{$\bullet$}
\def\hang{\hangindent\itmindent}

%\psect{Personal Information}
%\smallskip
%\beginlsverbatim%
%Home address: 12769 Dianne Drive, Los Altos Hills, CA 94022
%Home Phone: 415 941-3984
%Citizenship:  U.S.A.
%\endlsverbatim

\disleft 80truept:{{\bf
Education:}}:
B.S. Electrical Engineering, California Institute of Technology, 1953\hfil\break
U.S. Navy Aviation Electronics Officers School, 1954\hfil\break
M.S. Electrical Engineering, MIT, 1960\hfil\break

\noindent{\bf
Professional experience:}

\disleft 80truept:{}:
April 1985 - present: Department of Computer Science; Stanford University;
Senior Research Associate and Associate Chairman of Computer Science Dept.

\disleft 80truept:{}:
1980-85: Founding President of Imagen Corporation,
Santa Clara, California.  Also V.P. of Advanced Systems development and
of manufacturing.

\disleft 80truept:{}:
1965-80: Executive Officer, Artificial Intelligence
Laboratory, Stanford University; also Lecturer and Senior Research
Associate in Computer Science.

\disleft 80truept:{}:
1963-65: Department Head, Information Systems Dept., Mitre Corp.,
Washington, D. C.

\disleft 80truept:{}:
1959-63: Associate Head, Intelligence Systems Dept., Mitre Corp.,
Bedford, Mass.

\disleft 80truept:{}:
1956-58: Staff Member, MIT Lincoln Lab., Lexington, Mass.

\disleft 80truept:{}:
1954-56: Digital Computer Project Officer, Aeronautical Computer Lab., Naval
Air Development Center, Johnsville, Pa.  Also LTJG, USNR.

\noindent
{\bf Professional Responsibilities:}

\item{1.}Local Arrangements Chairman, Third International Joint Conference on
Artificial Intelligence (IJCAI), Stanford, 1973.
\item{2.}Chairman, ACM Special Interest Group on Artificial Intelligence (SIGART), 1974-76.
\item{3.}Secretary-Treasurer, International Joint Conference on Artificial Intelligence,
Tbilisi, USSR, 1975.
\item{4.}Secretary-Treasurer, International Joint Conference on Artificial Intelligence,
MIT, 1977.
\item{5.}Treasurer, National Conference of the American Association for Artificial
Intelligence, Stanford, 1980.
\item{6.}Member, Board of Directors, Imagen Corporation, 1980-present.

\noindent{\bf
Society membership:}

\disleft 30truept:{}:
American Association for Artificial Intelligence\hfil\break
Association for Computing Machinery\hfil\break
Institute of Electrical and Electronic Engineers\hfil\break
Sigma Xi

\noindent
{\bf Selected Papers:}
\item{1.}``Kutta Integration with Error Control'',
{\it ACM National Conference,}
1957.
\item{2.}``Machine Recognition of Cursive Writing'',
{\it Information Processing 62}
(Proceedings of IFIP Congress 1962, Munich), North-Holland, Amsterdam, 1963.
\item{3.}Later work on cursive writing reported in N. Lindgren, ``Machine
Recognition of Human Language, Part III - Cursive Script Recognition'',
{\it IEEE Spectrum,}
May 1965.
\item{4.}(With John McCarthy, D.R. Reddy, and P. Vicens) ``A Computer with
Hands, Eyes, and Ears'',
{\it AFIPS Vol. 33 (Proc. FJCC),}
Thompson, Washington D.C., 1968.
\item{5.}(With John McCarthy), ``DIALNET and Home Computers'',
{\it Proc. First West Coast Computer Faire,}
San Francisco, April 1977.
\item{6.}``A look back at an office of the future'' in
{\it Decision Support Systems: Issues and Challenges,}
Pergamon Press, Oxford, England, 1981.

\noindent
{\bf Other interests:}

\item{1.}Editor,
{\it California Yumyum}
(a restaurant guide), 1973-present.
\item{2.}Editor,
{\it U.S. Cycling Federation Rulebook}
(bicycle racing rules for the U.S.), 1977-1985.
\item{3.}Director, U.S. Cycling Federation, 1979-1985 (USCF administers amateur
bicycle racing in the U.S.A.).
\item{4.}Chairman, Board of Control, U.S. Cycling Federation, 1979-81,
(supervised all bicycle racing in the U.S.A.)
\item{5.}Delegate to the U.S. Olympic Committee, 1980-1985.
\item{6.}Chairman, Technical Commission, U.S. Cycling Federation, 1983-1985.
\item{7.}Director of the Road (cycling official), Olympic Games, Los Angeles,
1984.

\vfill\eject
\subsection{Biography of Vladimir Lifschitz}

\disleft 60pt:
{{\bf Education:}}: 
          
\disleft 60pt:
{1971}:		Ph.D. in Mathematics from the Steklov Mathematical Institute,
 		Leningrad, USSR.
\disleft 60pt:
{1968}:		Degree in Mathematics (B.S., M.S. equivalent) from Leningrad
 		University, Leningrad, USSR.

\disleft 60pt:
{{\bf Professional Experience:}}:

\disleft 60pt:
{1985-now}:	Senior Research Associate, Computer Science Department,
 		Stanford University.

\disleft 60pt:
{1984-85}:	Associate Professor of Mathematics and Computer Science,
 		San Jose State University.

\disleft 60pt:
{1982-84}:	Associate Professor, University of Texas -- El Paso.

\disleft 60pt:
{1979-82}:	Assistant Professor, University of Texas -- El Paso.

\disleft 60pt:
{1977-79}:	Visiting Assistant Professor, Brigham Young University.

\disleft 60pt:
{1976-77}:	Lecturer and Research Associate, Stanford University.

\disleft 60pt:
{1971-74}:	Research Associate, Institute of Mathematics and Economics,
 		Leningrad, USSR.

\noindent{\bf Publications}
\bigskip
\item{1.} 
``The normal form for deductions in predicate calculus with equality
and function symbols'',
{\sl Seminars in Mathematics} {\bf 4}, Leningrad: Steklov
Math.~Institute, 1967. {\sl Math.~Rev.}~{\bf 38} $\# 5583$.
\smallskip
\item{2.} ``Certain reduction classes and undecidable theories'',
{\sl Seminars in Mathematics} {\bf 4}, Leningrad: Steklov
Math.~Institute, 1967. {\sl Math.~Rev.}~{\bf 38} $\# 5603$.
\smallskip
\item{3.} ``Deductive validity and reduction classes'',
{\sl Seminars in Mathematics} {\bf 4}, Leningrad: Steklov
Math.~Institute, 1967. {\sl Math.~Rev.}~{\bf 38} $\# 5604$.
\smallskip
\item{4.} ``The decision problem for some constructive theories of equality'',
{\sl Seminars in Mathematics} {\bf 4}, Leningrad: Steklov
Math.~Institute, 1967. {\sl Math.~Rev.}~{\bf 38} $\# 65$.
\smallskip
\item{5.} ``The decision problem for some constructive theories of equality'',
{\sl Seminars in Mathematics} {\bf 4}, Leningrad: Steklov
Math.~Institute, 1967. {\sl Math.~Rev.}~{\bf 38} $\# 4281$.
\smallskip
\item{6.} ``Constructive mathematical theories consistent with classical logic'',
{\sl Proc. Steklov Math.~Institute} {\bf 98}, 1968. {\sl Math.~Rev.}~{\bf 36}
$\# 6265$.
\smallskip
\item{7.} ``A refinement of the form of deductions in predicate calculus with
equality and function symbols'',
{\sl Proc.~Steklov Math.~Institute} {\bf 98}, 1968. {\sl Math.~Rev.}~{\bf 40}
$\# 5415$.
\smallskip
\item{8.} ``Constructive analytic functions of the real variable'',
{\sl Seminars in Mathematics} {\bf 8}, Leningrad: Steklov
Math.~Institute, 1968. {\sl Math.~Rev.}~{\bf 43} $\# 7323$.
\smallskip
\item{9.} ``On a set of zeroes of a constructive power series in a real variable'',
{\sl Seminars in Mathematics} {\bf 16}, Leningrad: Steklov
Math.~Institute, 1969. {\sl Math.~Rev.}~{\bf 41} $\# 5193$.
\smallskip
\item{10.} ``The investigation of constructive functions by the method of
fillings'', {\sl Journal of Soviet Math.} {\bf 1}, 1972.
{\sl Math.~Rev.}~{\bf 48} $\# 3714$.
\smallskip
\item{11.} ``A locally analytic constructive function which is not analytic'',\hfill\break
{\sl Soviet Math.~Dokl.}~{\bf 13}, 1972.
{\sl Math.~Rev.}~{\bf 45} $\# 5320$.
\smallskip
\item{12.} ``A non-compact closed sphere in a compact constructive metric space'',
{\sl Seminars in Mathematics} {\bf 32}, Leningrad: Steklov
Math.~Institute, 1972. (With V.~P.~Chernov). 
{\sl Math.~Rev.}~{\bf 49} $\# 1487$.
\smallskip
\item{13.} ``$CT_0$ is stronger than $CT_0!$'', {\sl Proc. Amer. Math. Soc.}
{\bf 73}(1), 1979.
\smallskip
\item{14.} ``An intuitionistic definition of classical natural numbers'',
{\sl Proc. Amer. Math. Soc.}
{\bf 77}(3), 1979.
\item{15.} ``Semantical completeness theorems in logic and algebra'',
{\sl Proc. Amer. Math. Soc.}
{\bf 79}(1), 1980.
\smallskip
\item{16.} ``The efficiency of an algorithm of integer programming:
a probabilistic analysis'',
{\sl Proc. Amer. Math. Soc.}
{\bf 79}(1), 1980.
\smallskip
\item{17.} ``The number of increasing subsequences of the random permutation'',
{\sl Journal of Combinatorial Theory}, Ser.~A,
{\bf 31}, 1981. (With B. Pittel).
\smallskip
\item{18.} ``Constructive assertions in an extension of classical mathematics'',
{\sl Journal of Symbolic Logic}
{\bf 47}(2), 1982.
\smallskip
\item{19.} ``A note on the complexity of a partition algorithm'',
{\sl Information Processing Letters}
{\bf 17}, 1983. (With L. Pesotchinsky).
\smallskip
\item{20.} ``The worst-case performance and the most probable performance of a
class of set-covering algorithms'',
{\sl SIAM Journal on Computing}
{\bf 12}(2), 1983. (With B.~Pittel).
\smallskip
\item{21.} ``On verification of programs with goto statements'',
{\sl Information Processing Letters}
{\bf 18}, 1984.
\smallskip
\item{22.} ``Some results on circumscription'', in:
{\sl Non-Monotonic Reasoning Workshop}, New Paltz, 1984.
\smallskip
\item{23.} ``Calculable natural numbers'', in:
{\sl Intensional Mathematics}, North-Holland, 1985.
\smallskip
\item{24.} ``Computing circumscription'', in:
{\sl Proc. IJCAI-85}, 1985.
\smallskip
\item{25.} ``Closed-world data bases and circumscription'',
{\sl Artificial Intelligence}, {\bf 27}, 1985.
\smallskip
\item{26.} ``On the satisfiability of circumscription'',
{\sl Artificial Intelligence}, {\bf 28}, 1986.
\smallskip
\item{27.} {\sl Mechanical Theorem Proving in the USSR: Leningrad School},
Delphic Associates, 1986.
\smallskip
\item{28.} ``On the semantics of STRIPS'', {\sl
Workshop on Planning and Reasoning about Action}, Timberline, 1986.
\smallskip
\item{29.} ``Pointwise circumscription: Preliminary Report'',
{\sl Proc. AAAI-86}, 1986.
\smallskip
\item{30.} ``On the declarative semantics of logic programs with negation'',
{\sl Workshop on Foundations of Logic Programming and Deductive Databases},
Washington, D.C., 1986.



\vfill\eject

\subsection{Biography of Carolyn Talcott}
%\centerline{\bf Biography of Carolyn Talcott}

\beginlsverbatim
{\bf Born:}
        June 14, 1941 in Caldwell, Idaho


{\bf Education:}
        B.S.  (Chemistry) University of Denver, Denver Colorado, 1962
        Ph.D. (Chemistry) University of California, Berkeley 1966
        Ph.D. (Computer Science) Stanford University 1985


{\bf Honors, Fellowships and Societies:}
        National Honor Society (1958)
        Phi Beta Kappa (1962)
        NSF Postdoctoral Fellowship (1968-1969)
        IBM Graduate Fellowship (1978-1979)
        Association for Symbolic Logic


{\bf Professional Experience:}
Teaching and research in chemistry, mathematics and computer science.

        Teaching Assistant in Chemistry, 1960, College of Idaho
        Laboratory Assistant in Analytical Chemistry, 1960--1963
                Denver Research Institute
        Teaching Assistant in Mathematics, 1963, University of Denver
        Research Assistant in Chemistry, 1964-66, 
                University of California, Berkeley 
        Computer applications to theoretical chemistry, 
                Postdoctoral Fellow, 1968, Cambridge University, Cambridge, England
        Research in finite groups and computer applications to mathematics.
                Research Associate and Teaching Assistant in Mathematics, 1969-75,
                University of California, Santa Cruz 
        Teaching Assistant, 1977-1979, Stanford University
        Research Assistant at the Stanford Artificial Intelligence Laboratory, 
                Stanford University, 1977--1985  
\vfill\eject
        Summer Student Visitor, IBM Watson Research Laboratory, Summer 1979
        Design and implementation of SEUS programming language,
                Consultant, PERSEUS Inc., 1980-present
        Lecturer in Computer Science, Stanford University 1985-86
        Research Associate, Computer Science Department, Stanford University,
                1985--present


{\bf Other Interests:}  Chamber music (flute), baroque performance practice.

\endlsverbatim
%\bigbreak\centerline{\bf Publications}\nobreak\smallskip
%\bigbreak{\bf Publications}\nobreak\smallskip

\begingroup
\parskip= 4pt plus 1pt % half normal baseline??
\parindent0pt

Note - some publications as C.T. Williamson.

\pubitm{1.}{Katz, T. J. and Talcott, C. L. (1966)}
``The Cyclononatetraene Anion Radical'', 
J. Am. Chem. Soc. 88,
p. 4732.

\pubitm{2.}{ Meyers, R. J. and Talcott, C. L. (1967)}:
``Electron Spin Resonance of the Radical Anions of Pyridine and Related Nitrogen 
Heterocyclics'', 
Mol. Phys. 12, pp. 549--567.

\pubitm{3.}{ Talcott, Carolyn (1967)}: 
``Electron Spin Resonance Studies of Radicals Produced by Electrolysis''
 Ph.D. Thesis, University of California, Berkeley (1967).

\pubitm{4.}{ Amaral, A. M. S. C., Linnett, J. W. and Williamson, C. T. (1970)}
``The Double Bond in Ethylene'',
Theoret. Chim. Acta(Berlin) 16, 
pp. 249--262.

\pubitm{5.}{ Burgoyne, N. and Williamson, C. (1971)}
``Some Computations Involving Simple Lie Algebras'',
in {\it Proceedings of the Second ACM Symposium on Symbolic and Algebraic 
Manipulation},
pp. 162--171.

\pubitm{6.}{ Maurer, W. D. and Williamson, C. T. (1971)}
``Algorithm Verification Applied to the Todd-Coxeter Algorithm'', 
Electronics Research Lab., College of Engineering, U.C. Berkeley, Memorandum 
ERL-M317.

\pubitm{7.}{ Burgoyne, N. and Williamson, C. (1976)}
``On a theorem of Borel and Tits for Finite Chevalley Groups''. 

\pubitm{8.}{ Burgoyne, N. and Williamson, C. (1977)} 
``Semi-simple Classes in Chevalley Type Groups'', 
Pacific Journal of Mathematics, 70 
pp. 83--100.

\pubitm{9.}{ McCarthy, J. and Talcott, C. (1980)}
{\it LISP - Programming and Proving}, 
course notes, Stanford University. (to be published as a book).


\pubitm{10.}{ Mason, I. and Talcott, C. (1985)}
``Memories of S-expressions: Proving properties of Lisp-like programs
that destructively alter memory.''
Report No. STAN-CS-85-1057,
Computer Science Department, Stanford University. 


\pubitm{11.}{ Talcott, C. (1985)}
``The Essence of Rum: 
   A theory of intensional and extensional aspects  of Lisp-type computation.''
PhD Thesis, Stanford University.


\endgroup
\vfill\eject
\subsection{Biography of Natarajan Shankar}
\medskip
\disleft 85pt:
{{\bf Born:}}:August 30, 1959.\hfil\break

\disleft 85pt:
{{\bf Research Goals:}}:To study the construction, mechanization,
and use of logics in program verification and artificial intelligence.

\disleft 80pt:
{{\bf Education:}}:

\disleft 80pt:
{1980--1986}:{\it Ph.D. Student in Computer Science,\/}\hfil\break
The University of Texas at Austin.\hfil\break

\disleft 80pt:
{1975--1980}:{\it Bachelor of Technology in Electrical Engineering,\/}\hfil\break
Indian Institute of Technology, Madras, India.

\disleft 80pt:
{{\bf Ph.D. Topic:}}:{\it Proof-checking Metamathematics\/}: The goal of this 
research is to check the proofs of several important theorems in 
metamathematics using the Boyer-Moore theorem prover.  Metamathematical 
reasoning plays a crucial role in extending formal systems in a
sound and efficient manner.  The significant theorems proved
include the Church-Rosser theorem of {\it Lambda Calculus\/}, and a version of
the G\"odel incompleteness theorem for Cohen's set theory $Z_2$. 

\disleft 80pt:
{{\bf Professional Experience:}}:

\disleft 80pt:
{11/86 -- }:Research Associate, Formal Reasoning Group, Stanford University

\disleft 80pt:
{9/84 -- 10/86}:Research Assistant, Institute for Computing
Science, University of Texas at Austin.  Working on mechanical proofs in 
metamathematics.

\disleft 80pt:
{7/84 -- 9/84}:Visiting Fellow, University of Cambridge, England.

\disleft 80pt:
{1/83 -- 6/83}:Assistant Instructor, University of Texas at Austin.  Taught
the Introductory Fortran course, CS 304F.

\disleft 80pt:
{9/80 -- 12/82}:Teaching Assistant, University of Texas at Austin.  Taught
laboratory sections on Data Structures and PASCAL.

\disleft 80pt:
{{\bf Honors:}}:Govt. of India National Science Talent Scholarship, 1975.

\disleft 80pt:{}:
University of Texas Graduate Fellowship, 1983/84.

\disleft 80pt:{}:
Science and Engineering Research Council (UK) Visiting Fellowship, 1984.

\vfill\eject
\noindent
{\bf Publications:}

\item{1.}
{\bf Towards Mechanical Metamathematics},\hfil\break
{\it Journal of Automated Reasoning,\/} Vol. 1 No. 4, 1985.

\item{2.}
{\bf A Mechanical Proof of the Church-Rosser Theorem}.\hfil\break
{\it To be published in the Journal
of the ACM.}

\item{3.}
{\bf Checking the proof of G\"odel's Incompleteness Theorem},\hfil\break
University of Texas Institute for Computing Science Technical Report.


\vfill
\eject

\subsection{Biography of Yoav Shoham}
% see shoham.tex[1,clt] for latex source

%Yale University \\                              
%Computer Science Department \\
%10 Hillhouse Avenue       \\
%New Haven, Ct. 06520  \\

\disleft 80pt:{{\bf Education:}}:
Yale University, New Haven, Connecticut\hfil\break
           Ph.D., expected, 1986               \hfil\break
           Dissertation area: Computer Science / Artificial Intelligence\hfil\break
           Dissertation title: Reasoning about Change\hfil\break
           Technion, Haifa, Israel\hfil\break
           B.A. Computer Science, 1981, with distinction\hfil\break

\disleft 80pt:
{{\bf Professional Experience:}}:
\disleft 80pt:{}:
Assistant Professor in Computer Science\hfil\break
Stanford University\hfil\break
Starting in April 1987\hfil\break
\disleft 80pt:{}:
Visiting Scientist\hfil\break
Applied Mathematics Department\hfil\break
Weizmann Institute of Science, Rehovot, Israel\hfil\break
January-March 1987\hfil\break
\disleft 80pt:{}:
Research Assistant in Computer Science\hfil\break
Yale University\hfil\break
1982-1986 

\disleft 80pt:{}:
Teaching Assistant in Computer Science\hfil\break
Yale University\hfil\break
1982-1983 


\disleft 80pt:{{\bf Visits, grants and awards:}}:
\disleft 80pt:
{1985}: Visited the AI Center at SRI 
International for about six weeks, working with Dr. Stanley Rosenschein.
       
\disleft 80pt:
{1984}: Invited to Japan to participate in the FGCS conference, visit ICOT
and the Science University of Tokyo for a total duration of one month. 
       
\disleft 80pt:
{1984}: Participated in Commonsense Summer, a project organized by Dr. 
Jerry Hobbs at the
AI Center of SRI International.

\disleft 80pt:
{1982}: Received a Fulbright travel grant for travel to the U.S.
      
\disleft 80pt:
{1980,1981}: Received the Technion President Award 

\vfill\eject
\noindent
{\bf Publications:}

\item{1.}
{\it FAME: A Prolog Program That Solves  Problems
 in Combinatorics\/}, Y. Shoham, 
 Proc. Second International Logic Programming Conference,
 Uppsala, Sweden, 1984.

\item{2.}
{\it Knowledge Inversion\/},
 Y. Shoham and D.V. McDermott, 
 Proc. AAAI,
 Austin, Texas, 1984.

\item{3.}
{\it Prolog Predicates as Denoting Directed
 Relations\/}, Y. Shoham and D.V. McDermott,
 Proc. FGCS, Tokyo, Japan, 1984.

\item{4.}
{\it Temporal Notation and Causal Terminology\/},
Y. Shoham and T. Dean, 
Proc. Seventh Annual Conference of the Cognitive Science Society,
Irvine, California, 1985.

\item{5.}
{\it Naive Kinematics: One Aspect of Shape\/},
Y. Shoham,  Proc. 9th IJCAI, Los Angeles, California, 1985.
                    
\item{6.}
{\it Reasoning about Causation in Knowledge-Based Systems\/},
Y. Shoham, Proc. IEEE 
 Conference on  Artificial Intelligence Applications, Miami,
Florida, 1985.

\item{7.}
{\it Ten Requirements from a Theory of Change\/}, Y. Shoham,
Journal of New Generation Computing 3(4), special issue on 
knowledge representation, 1985.

\item{8.}
{\it Temporal Reasoning\/}, Y. Shoham and D.V. McDermott,
in {\it The Encyclopedia of Artificial Intelligence\/}, Shapiro, S.C.
(Ed.), Wiley-Interscience, New York (to appear).

\item{9.}
{\it A Propositional Modal Logic of Time Intervals\/} (short version), J.Y. Halpern and Y. Shoham,
Proc. IEEE Symp. on Logic in Computer Science, Boston, MA, June 1986.

\item{10.}
{\it Reified Temporal Logics: semantical and ontological considerations}, Y. Shoham,
Proc. 7th ECAI, Brighton, U.K., July 1986 (best-paper award).  Revised version
to appear in the {\it Journal of Artificial Intelligence}, under the title
{\it Temporal Logics in AI}.

\item{11.}
{\it Chronological Ignorance: time, knowledge, nonmonotonicity and causal theories,} 
Y. Shoham, Proc. AAAI, Philadelphia, PA, 1986.  

\vfill\eject

\section{References}

\medskip
{\bf Boyer, R. S. and J. S. Moore (1979)}:
{\it A computational logic\/} 
(Academic Press, New York).

{\bf R. L. Constable, et al. (1986)}: 
Implementing Mathematics.  Prentice-Hall, New
Jersey.

{\bf de Kleer, J. (1986)}:
An assumption-based TMS,
{\it Artificial Intelligence} {\bf 28}, 127-162.

{\bf Doyle, J. (1979)}:
A truth maintenance system,
{\it Artificial Intelligence} {\bf 12}, 231-272.

{\bf Feferman, S.(1975)}:
Non-extensional type-free theories of partial operations and classifications, I.
in: {\it Proof theory symposium, Kiel 1974},
edited by J. Diller and G. H. M\"uller, Lecture notes in mathematics, no. 500
(Springer, Berlin)

{\bf Felleisen, M. and Friedman, D. P.
(1986)}:
Control operators, the SECD-machine, and the $\lambda$-calculus,
in: {\it Proceedings of the conference on formal description of programming
  concepts part III. Ebberup Denmark, August 1986}.

{\bf Felleisen, M.,  Friedman, D. P., Kohlbecker E., and Druba B.
(1986)}:
Reasoning with continuations,
in: {\it Proceedings: symposium on logic in computer science, Boston June 1986}
pp. 131--141.

{\bf M. J. Gordon, A. J. Milner and C. P. Wadsworth (1979)}: 
Lecture Notes in Computer
Science.  Volume 78:  Edinburgh LCF.  Springer-Verlag, Berlin.

{\bf Ketonen, J.
(1984)}:
EKL -- a mathematically oriented proof checker,
in: {\it Seventh International Conference on Automated Deduction, Napa, California},
edited by R. E. Shostak,
Lecture notes in computer science, no. 170 
(Springer, Berlin).

{\bf Kleene, S. C.
(1952)}:
{\it Introduction to metamathematics},
(North-Holland, Amsterdam).

{\bf Knuth, D. E. (1984)}: ``Literate Programming''.  The Computer Journal 27, 2,
97-111.

{\bf Landin, P. J.(1964)}:
The mechanical evaluation of expressions, 
Computer Journal, 6,
pp. 308--320. 

{\bf Landin, P. J.(1965)}:
A correspondence between Algol60 and Church's lambda notation,
Comm. ACM, 8, 
pp. 89--101, 158--165. 

{\bf Landin, P. J.(1966)}:
The next 700 programming languages, 
Comm. ACM,  9, 
pp.  157--166. 

{\bf Lifschitz, V. (1985)}:
Computing circumscription, {\it Proc. 9th International Joint
Conference on Artificial Intelligence} {\bf 1}, %1985, 
121--127.

%{\bf Lifschitz, V. (1986)}:
%Pointwise circumscription: Preliminary report,
%{\it Proc. AAAI-86} {\bf 1} 1986, 406-410.

{\bf Lifschitz, V. (1987)}:
Formal theories of action, {\it Proceedings of the Workshop on the Frame Problem
in Artificial Intelligence} (to appear).

{\bf Lifschitz, V. (1987a)}:
On the declarative semantics of logic programs with negation,
{\it Proceedings of the Workshop on the Foundations of Logic Programming and
Deductive Databases} (to appear).

{\bf Mason, I.A. and Talcott, C.L. (1985)}:
Memories of S-expressions: Proving properties of Lisp-like programs
that destructively alter memory,
Department of Computer Science, Stanford University Report No. STAN-CS-85-1057

{\bf Mason, I.A. (1986a)}:
Equivalence of first order Lisp programs: proving properties of destructive
    programs via transformation,
in: Symposium on logic in computer science, Cambridge Mass. June 1986 (IEEE)
pp. 105--117.

{\bf Mason, I.A. (1986)}:
The semantics of destructive Lisp,
PhD Thesis, Stanford University.

{\bf McCarthy, John (1960)}: Programs with Common Sense, in Proceedings of the
Teddington Conference on the Mechanization of Thought Processes, Her Majesty's
Stationery Office, London.

{\bf J. McCarthy (1962)}: 
Computer Programs for Checking Mathematical Proofs.
Recursive Function Theory, Proceedings of a Symposium in Pure Mathematics,
Providence, Rhode Island, pp. 219-227.

{\bf McCarthy, J. (1963)}:
A basis for a mathematical theory of computation,
in: {\it Computer programming and formal systems},
edited by P. Braffort and D. Herschberg
(North-Holland, Amsterdam)
pp. 33--70.

{\bf McCarthy, J. and Hayes, P.(1969)}: 
Some philosophical problems from the standpoint
of artificial intelligence, in: Meltzer, B. and Michie, D. (Eds.),
{\it Machine Intelligence} {\bf 4} (Edinburgh University Press,
Edinburgh), 463-502.

{\bf McCarthy, John (1977)}:
Epistemological Problems of Artificial Intelligence, {\it Proceedings
of the Fifth International Joint Conference on Artificial 
Intelligence}, M.I.T., Cambridge, Mass.

{\bf McCarthy, John (1980)}: 
Circumscription --- A Form of Non-Monotonic Reasoning, {\it Artificial
Intelligence} {\bf 13}, 27-39.

{\bf McCarthy, J. (1986)}: 
Applications of circumscription to formalizing commonsense
knowledge, {\it Artificial Intelligence} {\bf 28}, 89-118.

{\bf Moschovakis Y. N. (1969)}:
Abstract first order computability I,
{\it Trans. Am. Math. Soc.}, {\bf 138},
pp. 427--464.

{\bf 
Platek, R. A.(1966)}:
{\it Foundations of recursion theory\/},
Ph.D. thesis, Stanford University.

{\bf 
Plotkin, G.
(1975)}:
Call-by-name, call-by-value and the lambda-v-calculus,
{\it Theoretical Computer Science}, {\bf 1},
pp. 125--159.

{\bf 
Plotkin, G.
(1977)}:
LCF considered as a programming language,
{\it Theoretical Computer Science\/}, {\bf 5}.
pp. 223--255.

{\bf Przymusinski, T. (1986)}: 
Query answering in circumscriptive and closed-world
theories, {\it Proc. AAAI-86} {\bf 1}, 186-190.

{\bf Reiter, R. (1980)}: A Logic for Default Reasoning,
{\it Artificial
Intelligence} {\bf 13}, 81-132.

{\bf N. Shankar (1985)}: 
``Towards Mechanical Metamathematics''.  Journal of Automated
Reasoning {\bf 1}, No. 4.

{\bf 
Smorynski, C.
(1977)}:
The incompleteness theorems,
in: Barwise, J. (ed.), 
{\it Handbook of mathematical logic\/}
(North-Holland, Amsterdam).
pp. 821--865.

{\bf 
Steele, G.L. and G.J. Sussman (1976)}:
LAMBDA the Ultimate Imperative,
MIT AI Memo 353.

{\bf 
Talcott, C.L.(1985)}:
The Essence of Rum: A theory of the intensional and extensional 
   aspects  of Lisp-type computation,
Ph.D. Thesis, Stanford University.

{\bf 
Wegner, P.
(1972)}:
The Vienna definition language,
{\it Computing Surveys}, {\bf 4}, 
pp. 5--63.

{\bf 
Weyhrauch, R. W.
(1980)}:
Prolegomena to a theory of formal reasoning,
{\it Artificial Intelligence}, {\bf 13},
pp. 133--170.

\vfill\eject
\bye

% now we make our own control sequences for 0 and 1
% Temporarily give @ letter catcode (11) so the format's private
% @-named registers can be referenced, and alias them under public
% names for use in this file.
\catcode `@ = 11
\let\pt = \p@          % \pt: the kernel's constant dimen register \p@ (1pt)
\let\zpt = \z@         % \zpt: the kernel's constant dimen register \z@ (0pt)
\let\zeroskip = \z@skip % \zeroskip: the kernel's zero glue register \z@skip
\catcode `@ = 12       % restore @ to catcode "other"

%\def\noblackbox{\overfullrule \zpt}
\newcount\psectCounter  % running number for \psect headings
% \psect{Title}: typeset a numbered, bold, unindented section heading
% "N.  Title", globally advancing \psectCounter.  Ends the current
% paragraph, discourages a page break after the heading (\nobreak) and
% allows one before it (\penalty-200).
% The % after the opening brace is deliberate: the original definition
% had "{ " + newline there, which put a spurious space token at the
% start of every expansion.
\def\psect#1{%
   \global\advance\psectCounter1
   \par
   \penalty-200\bigskip
   \leftline{\bf \number\psectCounter.\enskip  #1}\nobreak}

\def\itmindent{20pt}    % hanging indent used for bulleted (\itm) items
\def\itmtic{$\bullet$}  % bullet marker for \itm items
% NOTE: this redefines plain TeX's \hang (normally \hangindent\parindent)
% to hang by \itmindent instead.
\def\hang{\hangindent\itmindent}

\cheader{Biography of Lester Earnest}


\psect{Personal Information}
\smallskip
\beginlsverbatim%
Home address: 12769 Dianne Drive, Los Altos Hills, CA 94022
Home Phone: 415 941-3984
Citizenship:  U.S.A.
\endlsverbatim

\psect{Education}
\smallskip
\beginlsverbatim%
B.S. Electrical Engineering, California Institute of Technology, 1953
U.S. Navy Aviation Electronics Officers School, 1954
M.S. Electrical Engineering, MIT, 1960
\endlsverbatim

\hangafter0

\psect{Professional experience}
\noindent
April 1985 - present: Department of Computer Science; Stanford University;
Senior Research Associate and Associate Chairman of Computer Science Dept.

\noindent
1980-85: Founding President of Imagen Corporation,
Santa Clara, California.  Also V.P. of Advanced Systems development and
of manufacturing.

\noindent
1965-80: Executive Officer, Artificial Intelligence
Laboratory, Stanford University; also Lecturer and Senior Research
Associate in Computer Science.

\noindent
1963-65: Department Head, Information Systems Dept., Mitre Corp.,
Washington, D. C.

\noindent
1959-63: Associate Head, Intelligence Systems Dept., Mitre Corp.,
Bedford, Mass.

\noindent
1956-58: Staff Member, MIT Lincoln Lab., Lexington, Mass.

\noindent
1954-56: Digital Computer Project Officer, Aeronautical Computer Lab., Naval
Air Development Center, Johnsville, Pa.  Also LTJG, USNR.

\hangafter1
\psect{Professional responsibilities}
\itm Local Arrangements Chairman, Third International Joint Conference on
Artificial Intelligence (IJCAI), Stanford, 1973.
\itm Chairman, ACM Special Interest Group on Artificial Intelligence (SIGART), 1974-76.
\itm Secretary-Treasurer, International Joint Conference on Artificial Intelligence,
Tbilisi, USSR, 1975.
\itm Secretary-Treasurer, International Joint Conference on Artificial Intelligence,
MIT, 1977.
\itm Treasurer, National Conference of the American Association for Artificial
Intelligence, Stanford, 1980.
\itm Member, Board of Directors, Imagen Corporation, 1980-present.

\psect{Society membership}%
\smallskip
\beginlsverbatim%
American Association for Artificial Intelligence
Association for Computing Machinery
Institute of Electrical and Electronic Engineers
Sigma Xi
\endlsverbatim

\psect{Selected Papers}
\itm ``Kutta Integration with Error Control'',
{\it ACM National Conference,}
1957.
\itm ``Machine Recognition of Cursive Writing'',
{\it Information Processing 62}
(Proceedings of IFIP Congress 1962, Munich), North-Holland, Amsterdam, 1963.
\itm Later work on cursive writing reported in N. Lindgren, ``Machine
Recognition of Human Language, Part III - Cursive Script Recognition'',
{\it IEEE Spectrum,}
May 1965.
\itm (With John McCarthy, D.R. Reddy, and P. Vicens) ``A Computer with
Hands, Eyes, and Ears'',
{\it AFIPS Vol. 33 (Proc. FJCC),}
Thompson, Washington D.C., 1968.
\itm (With John McCarthy), ``DIALNET and Home Computers'',
{\it Proc. First West Coast Computer Faire,}
San Francisco, April 1977.
\itm ``A look back at an office of the future'' in
{\it Decision Support Systems: Issues and Challenges,}
Pergamon Press, Oxford, England, 1981.

\psect{Other interests}
\itm Editor,
{\it California Yumyum}
(a restaurant guide), 1973-present.
\itm Editor,
{\it U.S. Cycling Federation Rulebook}
(bicycle racing rules for the U.S.), 1977-1985.
\itm Director, U.S. Cycling Federation, 1979-1985 (USCF administers amateur
bicycle racing in the U.S.A.).
\itm Chairman, Board of Control, U.S. Cycling Federation, 1979-81,
(supervised all bicycle racing in the U.S.A.)
\itm Delegate to the U.S. Olympic Committee, 1980-1985.
\itm Chairman, Technical Commission, U.S. Cycling Federation, 1983-1985.
\itm Director of the Road (cycling official), Olympic Games, Los Angeles,
1984.